import matplotlib.pyplot as plt
import numpy as np
import os
import PIL
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras import Sequential
from PIL import Image
! pip install -q kaggle
!mkdir -p ~/.kaggle
!cp kaggle.json ~/.kaggle/
!ls ~/.kaggle
!chmod 600 /root/.kaggle/kaggle.json
# output: kaggle.json chmod: cannot access '/root/.kaggle/kaggle.json': Permission denied
# Download the Animals-10 dataset from Kaggle (skipped automatically when a
# more recently modified local copy of animals10.zip already exists).
!kaggle datasets download -d 'alessiocorrado99/animals10'
# output: Warning: Your Kaggle API key is readable by other users on this system! To fix this, you can run 'chmod 600 /home/kkusik/.kaggle/kaggle.json' animals10.zip: Skipping, found more recently modified local copy (use --force to force download)
from os import path
if(path.exists('animals')==False):
! unzip animals10.zip -d animals
! ls animals/raw-img/scoiattolo/ | wc -l
# output: ls: cannot access 'animals/raw-img/scoiattolo/': No such file or directory 0
import os
# Per-class image directories. Variable names are the Italian class names
# from the original Animals-10 dataset; the folders themselves were renamed
# to English (they must match the class_names printed by
# image_dataset_from_directory below).
canedir = 'animals/raw-img/dog/'
cavallo = 'animals/raw-img/horse/'
elefante = 'animals/raw-img/elephant/'
farfalla = 'animals/raw-img/butterfly/'
gallina = 'animals/raw-img/chicken/'
gatto = 'animals/raw-img/cat/'
mucca = 'animals/raw-img/cow/'
# BUG FIX: was 'sheepa' — a typo; the actual class folder is 'sheep'
# (see the class_names list and the test_animals/sheep/ paths below).
pecora = 'animals/raw-img/sheep/'
ragno = 'animals/raw-img/spider/'
scoiattolo = 'animals/raw-img/squirrel/'
import pathlib

# Sanity check on the extracted data: count the squirrel images and open
# the first one for display.
path = pathlib.Path.cwd() / scoiattolo
# FIX: glob the directory once instead of twice (the original scanned the
# folder for the count and again for the file list).
wiewiorka = list(path.glob('*'))  # 'wiewiorka' = Polish for squirrel
image_count = len(wiewiorka)
print(image_count)
print(path)
PIL.Image.open(str(wiewiorka[0]))
# Input geometry and batching for the image pipeline.
img_height, img_width = 190, 190
batch_size = 30

path = pathlib.Path.cwd() / 'animals/raw-img'

# Training split: 80/20 train/validation. The fixed seed makes the split
# reproducible and complementary with the validation loader below.
train_ds = tf.keras.preprocessing.image_dataset_from_directory(
    path,
    validation_split=0.2,
    subset="training",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size,
)
# output: Found 26176 files belonging to 10 classes. Using 20941 files for training.
# Validation split: must use the same seed and validation_split as the
# training loader so the two subsets do not overlap.
val_ds = tf.keras.preprocessing.image_dataset_from_directory(
    path,
    validation_split=0.2,
    subset="validation",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size,
)

# Class labels are inferred from the sub-directory names.
class_names = train_ds.class_names
print(class_names)
# output: Found 26176 files belonging to 10 classes. Using 5235 files for validation. ['butterfly', 'cat', 'chicken', 'cow', 'dog', 'elephant', 'horse', 'sheep', 'spider', 'squirrel']
import matplotlib.pyplot as plt

# FIX: removed dead code — the original built a
# tf.keras ...preprocessing.Resizing(img_height, img_width) layer here and
# immediately discarded it (never attached to a model or dataset), so it
# had no effect; image_dataset_from_directory already resizes via
# image_size=(img_height, img_width).

# Show a 5x5 grid of sample training images with their class labels.
plt.figure(figsize=(10, 10))
for images, labels in train_ds.take(1):
    for i in range(25):
        ax = plt.subplot(5, 5, i + 1)
        plt.imshow(images[i].numpy().astype("uint8"))
        plt.title(class_names[labels[i]])
        plt.axis("off")

# Inspect one batch: expect (batch_size, img_height, img_width, 3) images
# and a (batch_size,) label vector.
for image_batch, labels_batch in train_ds:
    print(image_batch.shape)
    print(labels_batch.shape)
    break
# output: (30, 190, 190, 3) (30,)
AUTOTUNE = tf.data.AUTOTUNE

# Input-pipeline performance: cache decoded images, shuffle the training
# data, and prefetch so preprocessing overlaps with training.
train_ds = (
    train_ds
    .cache()
    .shuffle(1000)
    .prefetch(buffer_size=AUTOTUNE)
)
val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)

# Sanity check that Rescaling(1/255) maps pixel values into [0, 1].
normalization_layer = layers.experimental.preprocessing.Rescaling(1./255)
normalized_ds = train_ds.map(lambda x, y: (normalization_layer(x), y))
image_batch, labels_batch = next(iter(normalized_ds))
first_image = image_batch[0]
print(np.min(first_image), np.max(first_image))
# output: 0.0 0.94013244
num_classes = 10

# Baseline CNN: rescale -> three conv/pool stages -> dense head.
# The final Dense layer emits raw logits; the loss below is configured
# with from_logits=True accordingly.
model = Sequential([
    # Rescaling declares the model's input shape.
    layers.experimental.preprocessing.Rescaling(
        1./255, input_shape=(img_height, img_width, 3)),
    # FIX: dropped the redundant input_shape= on this Conv2D — the input
    # shape is already fixed by the Rescaling layer above.
    layers.Conv2D(40, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(80, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(160, 3, padding='same', activation='relu'),
    layers.Conv2D(160, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(320, activation='relu'),
    layers.Dense(num_classes)  # raw logits, one per class
])

model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
model.summary()
# output (NOTE: stale — this summary shows a 150x150 input and 16/32/64-filter convs, which does not match the 190x190 / 40-80-160 model defined above; it was pasted from an earlier run):
# Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= rescaling_1 (Rescaling) (None, 150, 150, 3) 0 _________________________________________________________________ conv2d (Conv2D) (None, 150, 150, 16) 448 _________________________________________________________________ max_pooling2d (MaxPooling2D) (None, 75, 75, 16) 0 _________________________________________________________________ conv2d_1 (Conv2D) (None, 75, 75, 32) 4640 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 37, 37, 32) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 37, 37, 64) 18496 _________________________________________________________________ conv2d_3 (Conv2D) (None, 37, 37, 64) 36928 _________________________________________________________________ max_pooling2d_2 (MaxPooling2 (None, 18, 18, 64) 0 _________________________________________________________________ flatten (Flatten) (None, 20736) 0 _________________________________________________________________ dense (Dense) (None, 128) 2654336 _________________________________________________________________ dense_1 (Dense) (None, 10) 1290 ================================================================= Total params: 2,716,138 Trainable params: 2,716,138 Non-trainable params: 0 _________________________________________________________________
# Train the baseline model, tracking validation metrics every epoch.
epochs = 10
history = model.fit(train_ds, validation_data=val_ds, epochs=epochs)
# output: Epoch 1/10 655/655 [==============================] - 249s 379ms/step - loss: 1.9280 - accuracy: 0.3282 - val_loss: 1.4279 - val_accuracy: 0.5219 Epoch 2/10 655/655 [==============================] - 230s 351ms/step - loss: 1.2420 - accuracy: 0.5773 - val_loss: 1.1893 - val_accuracy: 0.5990 Epoch 3/10 655/655 [==============================] - 228s 348ms/step - loss: 0.9253 - accuracy: 0.6868 - val_loss: 1.1835 - val_accuracy: 0.6248 Epoch 4/10 655/655 [==============================] - 249s 380ms/step - loss: 0.6695 - accuracy: 0.7769 - val_loss: 1.3249 - val_accuracy: 0.6180 Epoch 5/10 655/655 [==============================] - 240s 367ms/step - loss: 0.4224 - accuracy: 0.8604 - val_loss: 1.3271 - val_accuracy: 0.6180 Epoch 6/10 655/655 [==============================] - 235s 359ms/step - loss: 0.2327 - accuracy: 0.9215 - val_loss: 1.6615 - val_accuracy: 0.6392 Epoch 7/10 655/655 [==============================] - 239s 365ms/step - loss: 0.1312 - accuracy: 0.9585 - val_loss: 1.9260 - val_accuracy: 0.6304 Epoch 8/10 655/655 [==============================] - 242s 370ms/step - loss: 0.1086 - accuracy: 0.9636 - val_loss: 2.1734 - val_accuracy: 0.6180 Epoch 9/10 655/655 [==============================] - 237s 362ms/step - loss: 0.0817 - accuracy: 0.9725 - val_loss: 2.2388 - val_accuracy: 0.6319 Epoch 10/10 655/655 [==============================] - 241s 369ms/step - loss: 0.0647 - accuracy: 0.9785 - val_loss: 2.5023 - val_accuracy: 0.6153
# Plot the baseline training curves: accuracy on the left, loss on the
# right. The widening train/validation gap indicates overfitting.
hist = history.history
acc, val_acc = hist['accuracy'], hist['val_accuracy']
loss, val_loss = hist['loss'], hist['val_loss']
epochs_range = range(epochs)

plt.figure(figsize=(8, 8))

plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')

plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')

plt.show()
# Light data augmentation: random horizontal flip plus small rotation and
# zoom. Packaged as a Sequential so it can be prepended to the model and
# applied only during training.
data_augmentation = keras.Sequential([
    layers.experimental.preprocessing.RandomFlip(
        "horizontal", input_shape=(img_height, img_width, 3)),
    layers.experimental.preprocessing.RandomRotation(0.1),
    layers.experimental.preprocessing.RandomZoom(0.1),
])

# Preview nine independent augmentations of the same training image.
plt.figure(figsize=(10, 10))
for images, _ in train_ds.take(1):
    for i in range(9):
        augmented_images = data_augmentation(images)
        ax = plt.subplot(3, 3, i + 1)
        plt.imshow(augmented_images[0].numpy().astype("uint8"))
        plt.axis("off")
num_classes = 10

# Second model: same conv trunk as the baseline, but with data
# augmentation in front and a smaller (160-unit) dense head — both aimed
# at reducing the overfitting seen in the first run.
model = Sequential([
    data_augmentation,  # its first layer declares the input shape
    layers.experimental.preprocessing.Rescaling(1./255),
    # FIX: dropped the redundant input_shape= on this Conv2D — the input
    # shape is already declared inside data_augmentation.
    layers.Conv2D(40, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(80, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(160, 3, padding='same', activation='relu'),
    layers.Conv2D(160, 3, padding='same', activation='relu'),
    layers.MaxPooling2D(),
    layers.Flatten(),
    layers.Dense(160, activation='relu'),
    layers.Dense(num_classes)  # raw logits; loss uses from_logits=True
])

model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
model.summary()
# output: Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= sequential (Sequential) (None, 190, 190, 3) 0 _________________________________________________________________ rescaling_1 (Rescaling) (None, 190, 190, 3) 0 _________________________________________________________________ conv2d (Conv2D) (None, 190, 190, 40) 1120 _________________________________________________________________ max_pooling2d (MaxPooling2D) (None, 95, 95, 40) 0 _________________________________________________________________ conv2d_1 (Conv2D) (None, 95, 95, 80) 28880 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 47, 47, 80) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 47, 47, 160) 115360 _________________________________________________________________ conv2d_3 (Conv2D) (None, 47, 47, 160) 230560 _________________________________________________________________ max_pooling2d_2 (MaxPooling2 (None, 23, 23, 160) 0 _________________________________________________________________ flatten (Flatten) (None, 84640) 0 _________________________________________________________________ dense (Dense) (None, 160) 13542560 _________________________________________________________________ dense_1 (Dense) (None, 10) 1610 ================================================================= Total params: 13,920,090 Trainable params: 13,920,090 Non-trainable params: 0 _________________________________________________________________
# Train the augmented model longer — augmentation slows convergence but
# should narrow the train/validation gap.
epochs = 20
history = model.fit(train_ds, validation_data=val_ds, epochs=epochs)
# output: Epoch 1/20 699/699 [==============================] - 1290s 2s/step - loss: 2.1541 - accuracy: 0.2572 - val_loss: 1.6608 - val_accuracy: 0.4399 Epoch 2/20 699/699 [==============================] - 1317s 2s/step - loss: 1.5411 - accuracy: 0.4737 - val_loss: 1.3845 - val_accuracy: 0.5377 Epoch 3/20 699/699 [==============================] - 1273s 2s/step - loss: 1.2799 - accuracy: 0.5684 - val_loss: 1.1667 - val_accuracy: 0.5996 Epoch 4/20 699/699 [==============================] - 1215s 2s/step - loss: 1.1423 - accuracy: 0.6088 - val_loss: 1.1022 - val_accuracy: 0.6279 Epoch 5/20 699/699 [==============================] - 1169s 2s/step - loss: 1.0277 - accuracy: 0.6517 - val_loss: 1.0700 - val_accuracy: 0.6355 Epoch 6/20 699/699 [==============================] - 1171s 2s/step - loss: 0.9317 - accuracy: 0.6846 - val_loss: 0.9711 - val_accuracy: 0.6714 Epoch 7/20 699/699 [==============================] - 1171s 2s/step - loss: 0.8745 - accuracy: 0.7067 - val_loss: 0.9461 - val_accuracy: 0.6825 Epoch 8/20 699/699 [==============================] - 1169s 2s/step - loss: 0.8257 - accuracy: 0.7263 - val_loss: 1.0468 - val_accuracy: 0.6607 Epoch 9/20 699/699 [==============================] - 1169s 2s/step - loss: 0.7954 - accuracy: 0.7296 - val_loss: 1.0181 - val_accuracy: 0.6606 Epoch 10/20 699/699 [==============================] - 1170s 2s/step - loss: 0.7507 - accuracy: 0.7481 - val_loss: 1.0353 - val_accuracy: 0.6596 Epoch 11/20 699/699 [==============================] - 1235s 2s/step - loss: 0.7490 - accuracy: 0.7446 - val_loss: 0.9528 - val_accuracy: 0.7033 Epoch 12/20 699/699 [==============================] - 1176s 2s/step - loss: 0.7077 - accuracy: 0.7596 - val_loss: 1.0178 - val_accuracy: 0.6669 Epoch 13/20 699/699 [==============================] - 1177s 2s/step - loss: 0.6620 - accuracy: 0.7776 - val_loss: 0.8986 - val_accuracy: 0.7095 Epoch 14/20 699/699 [==============================] - 1185s 2s/step - loss: 0.6544 - accuracy: 0.7783 - val_loss: 0.9461 -
# output (cont.): val_accuracy: 0.7011 Epoch 15/20 699/699 [==============================] - 1180s 2s/step - loss: 0.6054 - accuracy: 0.7954 - val_loss: 1.0160 - val_accuracy: 0.6804 Epoch 16/20 699/699 [==============================] - 1176s 2s/step - loss: 0.5939 - accuracy: 0.7989 - val_loss: 0.9900 - val_accuracy: 0.6896 Epoch 17/20 699/699 [==============================] - 1175s 2s/step - loss: 0.5793 - accuracy: 0.8005 - val_loss: 1.0587 - val_accuracy: 0.6789 Epoch 18/20 699/699 [==============================] - 1183s 2s/step - loss: 0.5700 - accuracy: 0.8044 - val_loss: 0.9791 - val_accuracy: 0.6955 Epoch 19/20 699/699 [==============================] - 1171s 2s/step - loss: 0.5427 - accuracy: 0.8181 - val_loss: 0.8697 - val_accuracy: 0.7261 Epoch 20/20 699/699 [==============================] - 1189s 2s/step - loss: 0.5221 - accuracy: 0.8247 - val_loss: 0.9734 - val_accuracy: 0.6932
# Training curves for the augmented model (accuracy left, loss right).
hist = history.history
acc, val_acc = hist['accuracy'], hist['val_accuracy']
loss, val_loss = hist['loss'], hist['val_loss']
epochs_range = range(epochs)

plt.figure(figsize=(8, 8))

plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')

plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')

plt.show()
import pathlib

# Evaluate the trained model on four held-out images per class, stored as
# test_animals/<class>/<class><1..4>.jpg, and plot each image titled with
# the model's top prediction and its softmax confidence.
# (FIX: removed a 13-line block of commented-out single-image prediction
# code that duplicated this loop's body.)
anim = ['butterfly', 'cat', 'chicken', 'cow', 'dog', 'elephant', 'horse',
        'sheep', 'spider', 'squirrel']
for j in anim:
    test_path = pathlib.Path.cwd() / f'test_animals/{j}'
    plt.figure(figsize=(10, 10))
    for i in range(4):
        temp = test_path / f'{j}{i + 1}.jpg'
        print(temp)
        img = keras.preprocessing.image.load_img(
            temp, target_size=(img_height, img_width))
        img_array = keras.preprocessing.image.img_to_array(img)
        img_array = tf.expand_dims(img_array, 0)  # create a 1-image batch
        predictions = model.predict(img_array)
        # The model outputs logits; softmax turns them into probabilities.
        score = tf.nn.softmax(predictions[0])
        ax = plt.subplot(2, 2, i + 1)
        plt.imshow(img)
        plt.title("This image most likely belongs\n to {} with a {:.2f} percent confidence."
                  .format(class_names[np.argmax(score)], 100 * np.max(score)))
        plt.axis("off")
# output: /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/butterfly/butterfly1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/butterfly/butterfly2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/butterfly/butterfly3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/butterfly/butterfly4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cat/cat1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cat/cat2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cat/cat3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cat/cat4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/chicken/chicken1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/chicken/chicken2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/chicken/chicken3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/chicken/chicken4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cow/cow1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cow/cow2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cow/cow3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/cow/cow4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/dog/dog1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/dog/dog2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/dog/dog3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/dog/dog4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/elephant/elephant1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/elephant/elephant2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/elephant/elephant3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/elephant/elephant4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/horse/horse1.jpg
# output (cont.): /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/horse/horse2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/horse/horse3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/horse/horse4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/sheep/sheep1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/sheep/sheep2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/sheep/sheep3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/sheep/sheep4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/spider/spider1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/spider/spider2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/spider/spider3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/spider/spider4.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/squirrel/squirrel1.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/squirrel/squirrel2.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/squirrel/squirrel3.jpg /mnt/c/Users/pc/OneDrive/Dokumenty/PSI_animals/test_animals/squirrel/squirrel4.jpg